#!python -m venv maint_env_j
#!ipython kernel install --user --name=maint_env_j
#maint_env_j\Scripts\activate
#!pip install tensorflow
import generateDataFrames
import data
import plotFactory
import stationarity
import arima
import isolationForest
import logger
import os.path
import correlationMatrixPlot
import constants
import deeplearning
import matplotlib.pyplot as plt
# Start from a clean log file: remove any leftover from a previous run.
# EAFP: attempt the removal and tolerate a missing file, instead of racing
# an os.path.isfile() check against the filesystem (TOCTOU).
try:
    os.remove(logger.Logger.FILENAME)
except FileNotFoundError:
    pass

# Generate one processed dataframe per sensor and keep the list of
# produced files for the analysis below.
d = generateDataFrames.DataFrameForEachSensor()
d.generateAllDataFrames()
allFiles = d.processedFiles

# Frequency band analysed throughout this script.
# NOTE(review): units/band indexing are project-specific — confirm in data module.
bandSelected = 300
# Instantiate a PlotFactory for every processed file. The per-file plot
# calls are currently disabled; `pf` from the final iteration is reused by
# the plotting code further below — presumably intentional, but worth a look.
for sensorFile in allFiles:
    pf = plotFactory.PlotFactory(sensorFile, bandSelected)
    # pf.createPlot('TimeSeries')
    # pf.createPlot('HeatMap')
    # pf.createPlot('BoxPlot')
    # pf.createPlot('AnomalyDetectionPlot')
    # pf.createPlot('CorrelationMatrixPlot')
# Focus the analysis on the third processed file.
fileSensor = allFiles[2]

# Time-series plots over the full period, with marker lines at the
# normal-state and breaking-point dates.
tsplot = pf.createPlot('TimeSeries')
tsplot.plotAllSensorData(
    fileSensor,
    constants.Constants.startDateTimeSeries,
    constants.Constants.endDateTimeSeries,
)
tsplot.plotTimeSeries(
    fileSensor,
    bandSelected,
    constants.Constants.startDateTimeSeries,
    constants.Constants.endDateTimeSeries,
    ax=None,
    lines=[constants.Constants.normalStateDate, constants.Constants.breakingPointDate],
)
# tsplot.plotTimeSeries(fileSensor, bandSelected, constants.Constants.startDateTimeSeries, constants.Constants.normalStateDate, ax = None)
# tsplot.plotTimeSeries(fileSensor, bandSelected, constants.Constants.normalStateDate, constants.Constants.breakingPointDate, ax = None)
# tsplot.plotTimeSeries(fileSensor, bandSelected, constants.Constants.breakingPointDate, constants.Constants.endDateTimeSeries, ax = None)
# WARNING:param.Curve01978: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters WARNING:param.Curve01984: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters
# Box plots: the full-period variant is disabled; compare the "normal"
# window against the normal-to-breaking-point window instead.
bpplot = pf.createPlot('BoxPlot')
# bpplot.plotAllSensorData(fileSensor, constants.Constants.startDateTimeSeries, constants.Constants.endDateTimeSeries)
bpplot.plotAllSensorData(
    fileSensor,
    constants.Constants.startDateTimeSeries,
    constants.Constants.normalStateDate,
)
bpplot.plotAllSensorData(
    fileSensor,
    constants.Constants.normalStateDate,
    constants.Constants.breakingPointDate,
)

# Heat map over the entire period, annotated at the two key dates.
hmplot = pf.createPlot('HeatMap')
hmplot.plotAllSensorData(
    fileSensor,
    constants.Constants.startDateTimeSeries,
    constants.Constants.endDateTimeSeries,
    lines=[constants.Constants.normalStateDate, constants.Constants.breakingPointDate],
)
# NOTE(review): the block below is disabled by wrapping it in a bare
# triple-quoted string (a no-op expression statement). Prefer '#' comments
# or deletion; a bare string still gets built at runtime.
'''
corrMtxNormal = correlationMatrixPlot.CorrelationMatrixPlot()#(fileSensor, bandSelected, constants.Constants.startDateTimeSeries, constants.Constants.normalStateDate)
corrMtxNormal.plotCorrelationMatrix(fileSensor, bandSelected, constants.Constants.startDateTimeSeries, constants.Constants.normalStateDate)
corrMtxBreak = correlationMatrixPlot.CorrelationMatrixPlot()
corrMtxBreak.plotCorrelationMatrix(fileSensor, bandSelected, constants.Constants.normalStateDate, constants.Constants.breakingPointDate)
corrMtxMaint = correlationMatrixPlot.CorrelationMatrixPlot()
corrMtxMaint.plotCorrelationMatrix(fileSensor, bandSelected, constants.Constants.breakingPointDate, constants.Constants.endDateTimeSeries)
'''
# Correlation matrix across the full period, with marker lines at the
# normal-state and breaking-point dates.
corrMtxFull = correlationMatrixPlot.CorrelationMatrixPlot()
corrMtxFull.plotCorrelationMatrix(
    fileSensor,
    bandSelected,
    constants.Constants.startDateTimeSeries,
    constants.Constants.endDateTimeSeries,
    lines=[constants.Constants.normalStateDate, constants.Constants.breakingPointDate],
)
# NOTE(review): anomaly-detection plotting disabled via a bare triple-quoted
# string (no-op expression); prefer '#' comments or deletion.
'''
adplot = pf.createPlot('AnomalyDetectionPlot')
#adplot.plotAllSensorData(fileSensor, constants.Constants.startDateTimeSeries, constants.Constants.endDateTimeSeries)
adplot.plotTimeSeries(fileSensor, bandSelected,
constants.Constants.startDateTimeSeries, constants.Constants.endDateTimeSeries, 1.8, 10,
ax = None)
'''
"\nadplot = pf.createPlot('AnomalyDetectionPlot')\n#adplot.plotAllSensorData(fileSensor, constants.Constants.startDateTimeSeries, constants.Constants.endDateTimeSeries)\nadplot.plotTimeSeries(fileSensor, bandSelected, \n constants.Constants.startDateTimeSeries, constants.Constants.endDateTimeSeries, 1.8, 10, \n ax = None)\n"
# Load the selected band both as a DataFrame and as a time series, then
# dump their structure (row counts, dtypes, memory) as a sanity check.
dataInstance = data.Data()
df = dataInstance.getDataFrame(fileSensor, bandSelected)
ts = dataInstance.getTimeSeries(fileSensor, bandSelected)

# DataFrame.info() / Series.info() print directly and return None, so the
# previous print(df.info()) emitted a stray "None" line after each dump.
df.info()
print("")
ts.info()
# <class 'pandas.core.frame.DataFrame'> Int64Index: 614 entries, 1465 to 2078 Data columns (total 2 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Date_Time 614 non-null datetime64[ns] 1 Amp 614 non-null float64 dtypes: datetime64[ns](1), float64(1) memory usage: 14.4 KB None <class 'pandas.core.series.Series'> DatetimeIndex: 225 entries, 2007-02-03 23:02:29 to 2008-01-01 23:13:16 Series name: Amp Non-Null Count Dtype -------------- ----- 225 non-null float64 dtypes: float64(1) memory usage: 3.5 KB None
# Isolation-forest anomaly detection on the band DataFrame.
IFmodel = isolationForest.IF(df)
# Feature engineering — per the dataset echoed below this cell it adds lag,
# rolling mean, calendar fields (weekday/hour/day/month/year) and Amp_Average.
IFmodel.prepareDataset()
# NOTE(review): return value is discarded outside a notebook; this call only
# displayed the prepared dataset interactively.
IFmodel.getDataSet()
# | Date_Time | Amp | Lag | Rolling_Mean | Weekday | Hour | Day | Month | Year | Month_day | Amp_Average | |
# |---|---|---|---|---|---|---|---|---|---|---|---|
# | 1466 | 2007-02-04 13:44:42 | 0.078103 | 0.082929 | 0.080516 | Sunday | 13 | 6 | 2 | 2007 | 4 | 0.078103 |
# | 1467 | 2007-02-04 23:06:30 | 0.077601 | 0.078103 | 0.079545 | Sunday | 23 | 6 | 2 | 2007 | 4 | 0.080746 |
# | 1468 | 2007-02-05 23:10:51 | 0.077965 | 0.077601 | 0.079150 | Monday | 23 | 0 | 2 | 2007 | 5 | 0.079890 |
# | 1469 | 2007-02-07 23:02:23 | 0.077164 | 0.077965 | 0.078753 | Wednesday | 23 | 2 | 2 | 2007 | 7 | 0.081915 |
# | 1470 | 2007-02-10 09:23:25 | 0.072900 | 0.077164 | 0.077777 | Saturday | 9 | 5 | 2 | 2007 | 10 | 0.060400 |
# | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
# | 2074 | 2009-01-28 00:01:36 | 0.086462 | 0.073419 | 0.077710 | Wednesday | 0 | 2 | 1 | 2009 | 28 | 0.095004 |
# | 2075 | 2009-01-30 00:08:58 | 0.074512 | 0.086462 | 0.077330 | Friday | 0 | 4 | 1 | 2009 | 30 | 0.095166 |
# | 2076 | 2009-02-01 00:19:03 | 0.053017 | 0.074512 | 0.074071 | Sunday | 0 | 6 | 2 | 2009 | 1 | 0.092577 |
# | 2077 | 2009-02-01 19:07:37 | 0.055891 | 0.053017 | 0.071908 | Sunday | 19 | 6 | 2 | 2009 | 1 | 0.053571 |
# | 2078 | 2009-02-02 00:12:53 | 0.075751 | 0.055891 | 0.072330 | Monday | 0 | 0 | 2 | 2009 | 2 | 0.098648 |
# 613 rows × 11 columns
# Fit the forest (per the echoed repr: contamination=0.005, max_samples=0.7,
# n_estimators=200, random_state=0) and score the dataset.
IFmodel.buildIF()
IFmodel.applyIF()
# Notebook output was fused with the next statement here, making the file
# unparseable; preserved as a comment:
# IsolationForest(contamination=0.005, max_samples=0.7, n_estimators=200,
#                 random_state=0)
# In a Jupyter environment, please rerun this cell to show the HTML
# representation or trust the notebook.
# IsolationForest(contamination=0.005, max_samples=0.7, n_estimators=200,
#                 random_state=0)
IFmodel.plotIF()
# WARNING:param.Curve00958: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters
# Training window: series start up to the known "normal" state boundary.
ts_train = dataInstance.getTimeSeries(
    fileSensor,
    bandSelected,
    constants.Constants.startDateTimeSeries,
    constants.Constants.normalStateDate,
)
# NOTE(review): the test window spans the WHOLE series, i.e. it includes the
# training period — presumably intentional (to visualise anomalies over the
# full history), but confirm.
ts_test = dataInstance.getTimeSeries(
    fileSensor,
    bandSelected,
    constants.Constants.startDateTimeSeries,
    constants.Constants.endDateTimeSeries,
)
print(ts_train)
# Date_Time
# 2007-02-03 23:02:29    0.082929
# 2007-02-04 13:44:42    0.078103
# 2007-02-04 23:06:30    0.077601
# 2007-02-05 23:10:51    0.077965
# 2007-02-07 23:02:23    0.077164
#                          ...
# 2007-12-28 09:41:10    0.082681
# 2007-12-28 23:07:47    0.073884
# 2007-12-30 23:25:52    0.072762
# 2008-01-01 00:00:39    0.071221
# 2008-01-01 23:13:16    0.074236
# Name: Amp, Length: 225, dtype: float64
# Reconstruction-based anomaly detection: build a Conv1D autoencoder
# (see the echoed model summary) trained on the "normal" window only.
enc = deeplearning.Autoencoder(ts_train)
enc.prepareDataset()
enc.buildModel()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv1d (Conv1D) (None, 26, 32) 256
dropout (Dropout) (None, 26, 32) 0
conv1d_1 (Conv1D) (None, 13, 16) 3600
conv1d_transpose (Conv1DTr (None, 26, 16) 1808
anspose)
dropout_1 (Dropout) (None, 26, 16) 0
conv1d_transpose_1 (Conv1D (None, 52, 32) 3616
Transpose)
conv1d_transpose_2 (Conv1D (None, 52, 1) 225
Transpose)
=================================================================
Total params: 9505 (37.13 KB)
Trainable params: 9505 (37.13 KB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
# Fit the autoencoder (50 epochs per the echoed training log).
enc.trainModel()
# Epoch 1/50 2/2 [==============================] - 2s 295ms/step - loss: 1.2518 - val_loss: 0.9616 Epoch 2/50 2/2 [==============================] - 0s 41ms/step - loss: 1.2276 - val_loss: 0.9476 Epoch 3/50 2/2 [==============================] - 0s 36ms/step - loss: 1.2041 - val_loss: 0.9328 Epoch 4/50 2/2 [==============================] - 0s 41ms/step - loss: 1.1753 - val_loss: 0.9158 Epoch 5/50 2/2 [==============================] - 0s 39ms/step - loss: 1.1454 - val_loss: 0.8960 Epoch 6/50 2/2 [==============================] - 0s 39ms/step - loss: 1.1126 - val_loss: 0.8734 Epoch 7/50 2/2 [==============================] - 0s 42ms/step - loss: 1.0705 - val_loss: 0.8469 Epoch 8/50 2/2 [==============================] - 0s 49ms/step - loss: 1.0429 - val_loss: 0.8163 Epoch 9/50 2/2 [==============================] - 0s 54ms/step - loss: 1.0039 - val_loss: 0.7800 Epoch 10/50 2/2 [==============================] - 0s 43ms/step - loss: 0.9569 - val_loss: 0.7379 Epoch 11/50 2/2 [==============================] - 0s 38ms/step - loss: 0.9130 - val_loss: 0.6919 Epoch 12/50 2/2 [==============================] - 0s 38ms/step - loss: 0.8545 - val_loss: 0.6444 Epoch 13/50 2/2 [==============================] - 0s 42ms/step - loss: 0.7965 - val_loss: 0.5950 Epoch 14/50 2/2 [==============================] - 0s 42ms/step - loss: 0.7357 - val_loss: 0.5431 Epoch 15/50 2/2 [==============================] - 0s 41ms/step - loss: 0.6697 - val_loss: 0.4901 Epoch 16/50 2/2 [==============================] - 0s 44ms/step - loss: 0.6061 - val_loss: 0.4392 Epoch 17/50 2/2 [==============================] - 0s 40ms/step - loss: 0.5634 - val_loss: 0.3949 Epoch 18/50 2/2 [==============================] - 0s 42ms/step - loss: 0.5176 - val_loss: 0.3590 Epoch 19/50 2/2 [==============================] - 0s 42ms/step - loss: 0.4793 - val_loss: 0.3285 Epoch 20/50 2/2 [==============================] - 0s 40ms/step - loss: 0.4500 - val_loss: 0.3021 Epoch 21/50 2/2 
# [==============================] - 0s 46ms/step - loss: 0.4075 - val_loss: 0.2800 Epoch 22/50 2/2 [==============================] - 0s 49ms/step - loss: 0.3793 - val_loss: 0.2639 Epoch 23/50 2/2 [==============================] - 0s 42ms/step - loss: 0.3408 - val_loss: 0.2520 Epoch 24/50 2/2 [==============================] - 0s 49ms/step - loss: 0.3148 - val_loss: 0.2443 Epoch 25/50 2/2 [==============================] - 0s 44ms/step - loss: 0.3021 - val_loss: 0.2402 Epoch 26/50 2/2 [==============================] - 0s 42ms/step - loss: 0.2798 - val_loss: 0.2358 Epoch 27/50 2/2 [==============================] - 0s 47ms/step - loss: 0.2858 - val_loss: 0.2327 Epoch 28/50 2/2 [==============================] - 0s 42ms/step - loss: 0.2645 - val_loss: 0.2304 Epoch 29/50 2/2 [==============================] - 0s 46ms/step - loss: 0.2570 - val_loss: 0.2257 Epoch 30/50 2/2 [==============================] - 0s 39ms/step - loss: 0.2564 - val_loss: 0.2172 Epoch 31/50 2/2 [==============================] - 0s 42ms/step - loss: 0.2469 - val_loss: 0.2089 Epoch 32/50 2/2 [==============================] - 0s 47ms/step - loss: 0.2256 - val_loss: 0.2014 Epoch 33/50 2/2 [==============================] - 0s 47ms/step - loss: 0.2315 - val_loss: 0.1945 Epoch 34/50 2/2 [==============================] - 0s 50ms/step - loss: 0.2184 - val_loss: 0.1892 Epoch 35/50 2/2 [==============================] - 0s 45ms/step - loss: 0.2127 - val_loss: 0.1843 Epoch 36/50 2/2 [==============================] - 0s 48ms/step - loss: 0.2110 - val_loss: 0.1807 Epoch 37/50 2/2 [==============================] - 0s 46ms/step - loss: 0.2059 - val_loss: 0.1782 Epoch 38/50 2/2 [==============================] - 0s 45ms/step - loss: 0.2122 - val_loss: 0.1756 Epoch 39/50 2/2 [==============================] - 0s 39ms/step - loss: 0.2007 - val_loss: 0.1737 Epoch 40/50 2/2 [==============================] - 0s 46ms/step - loss: 0.2026 - val_loss: 0.1715 Epoch 41/50 2/2 [==============================] - 0s 
# 40ms/step - loss: 0.1916 - val_loss: 0.1694 Epoch 42/50 2/2 [==============================] - 0s 42ms/step - loss: 0.1811 - val_loss: 0.1685 Epoch 43/50 2/2 [==============================] - 0s 45ms/step - loss: 0.1795 - val_loss: 0.1687 Epoch 44/50 2/2 [==============================] - 0s 48ms/step - loss: 0.1852 - val_loss: 0.1679 Epoch 45/50 2/2 [==============================] - 0s 46ms/step - loss: 0.1835 - val_loss: 0.1647 Epoch 46/50 2/2 [==============================] - 0s 40ms/step - loss: 0.1853 - val_loss: 0.1602 Epoch 47/50 2/2 [==============================] - 0s 37ms/step - loss: 0.1731 - val_loss: 0.1550 Epoch 48/50 2/2 [==============================] - 0s 41ms/step - loss: 0.1680 - val_loss: 0.1518 Epoch 49/50 2/2 [==============================] - 0s 46ms/step - loss: 0.1825 - val_loss: 0.1501 Epoch 50/50 2/2 [==============================] - 0s 44ms/step - loss: 0.1735 - val_loss: 0.1456
# WARNING:param.Curve01216: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters WARNING:param.Curve01222: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters
# <keras.src.engine.sequential.Sequential at 0x248ebc81780>
# Derive the anomaly threshold from the training reconstruction error
# (reported: ~0.3163).
enc.setThreshold()
# 6/6 [==============================] - 0s 3ms/step
# WARNING:param.Curve01283: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters WARNING:param.Curve01289: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters
# Reconstruction error threshold: 0.31629237184208664
# Flag anomalies over the full series using the computed threshold.
enc.testModel(ts_test)
# Test input shape: (563, 52, 1) 18/18 [==============================] - 0s 4ms/step
# WARNING:param.Curve01450: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters WARNING:param.Curve01456: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters
# Number of anomaly samples: 277
# WARNING:param.Curve01649: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters WARNING:param.Curve01655: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters
# Manually raise the threshold above the computed ~0.316 to be less
# sensitive; per the echoed outputs this reduced flagged samples 277 -> 89.
enc.threshold = 0.55
enc.testModel(ts_test)
# Test input shape: (563, 52, 1) 18/18 [==============================] - 0s 3ms/step
# WARNING:param.Curve01714: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters WARNING:param.Curve01720: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters
# Number of anomaly samples: 89
# WARNING:param.Curve01913: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters WARNING:param.Curve01919: Setting non-parameter attribute yformatter=%.2f using a mechanism intended only for parameters